library(rstan)
Loading required package: StanHeaders
Loading required package: ggplot2
rstan (Version 2.21.1, GitRev: 2e1f913d3ca3)
For execution on a local, multicore CPU with excess RAM we recommend calling
options(mc.cores = parallel::detectCores()).
To avoid recompilation of unchanged Stan programs, we recommend calling
rstan_options(auto_write = TRUE)
library(survival)
library(tidyverse)
Registered S3 methods overwritten by 'dbplyr':
  method         from
  print.tbl_lazy     
  print.tbl_sql      
── Attaching packages ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── tidyverse 1.3.1 ──
✓ tibble  3.1.4     ✓ dplyr   1.0.7
✓ tidyr   1.1.3     ✓ stringr 1.4.0
✓ readr   2.0.1     ✓ forcats 0.5.1
✓ purrr   0.3.4     
── Conflicts ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── tidyverse_conflicts() ──
x tidyr::extract() masks rstan::extract()
x dplyr::filter()  masks stats::filter()
x dplyr::lag()     masks stats::lag()
library(tidybayes)
library(scales)

Attaching package: ‘scales’

The following object is masked from ‘package:purrr’:

    discard

The following object is masked from ‘package:readr’:

    col_factor
# data, parameters, model and generated quantities blocks
# Exponential (constant-hazard) survival model with right censoring:
#   per-observation rate lambda_i = exp(alpha + X_i * beta)
#   uncensored times contribute exponential_lpdf (event density),
#   censored times contribute exponential_lccdf (probability of surviving past t).
# generated quantities draws posterior-predictive event times for the
# uncensored observations.
Stan_exponential_survival_model <- "
data{
  int <lower=1> N_uncensored;
  int <lower=1> N_censored;
  int <lower=0> numCovariates;
  matrix[N_censored, numCovariates] X_censored;
  matrix[N_uncensored, numCovariates] X_uncensored;
  vector <lower=0>[N_censored] times_censored;
  vector <lower=0>[N_uncensored] times_uncensored;
}

parameters{
  vector[numCovariates] beta; //regression coefficients
  real alpha; //intercept
}

model{
  beta ~ normal(0,10); //prior on regression coefficients
  alpha ~ normal(0,10); //prior on intercept
  target += exponential_lpdf(times_uncensored | exp(alpha+X_uncensored * beta)); //log-likelihood part for uncensored times
  target += exponential_lccdf(times_censored | exp(alpha+X_censored * beta)); //log-likelihood for censored times
}

generated quantities{
  vector[N_uncensored] times_uncensored_sampled; //prediction of death
  for(i in 1:N_uncensored) {
    times_uncensored_sampled[i] = exponential_rng(exp(alpha+X_uncensored[i,]* beta));
  }
}
"
# prepare the data
# Fix the RNG seed for reproducibility (stan() draws its own seed separately).
set.seed(42)
# library() errors immediately if the package is missing; require() only
# returns FALSE, which can let the script fail later with a confusing error.
library(tidyverse)
data <- read_csv('../data/necessary_fields.csv')
Rows: 2066 Columns: 7
── Column specification ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
Delimiter: ","
chr (1): host_type
dbl (1): duration_months
lgl (5): major_releases, censored, high_rev_freq, multi_repo, high_author_count

ℹ Use `spec()` to retrieve the full column specification for this data.
ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
N <- nrow(data)
# Flip the logical indicators to numeric 0/1 with inverted meaning
# (TRUE -> 0, FALSE -> 1) — exactly what car::recode("'TRUE' = 0; 'FALSE' = 1")
# produced, but vectorized in base R with no extra car dependency.
data$high_author_count <- as.numeric(!data$high_author_count)
data$censored <- as.numeric(!data$censored)
# Single-column covariate matrix; after recoding, x = 1 appears to mark the
# low-author-count stratum (see plot labels below) — TODO confirm encoding.
X <- as.matrix(pull(data, high_author_count))
# After recoding, censored == 0 marks the rows that WERE censored.
is_censored <- pull(data, censored) == 0
times <- pull(data, duration_months)
# is_censored is already logical; the former `== 1` comparison was a no-op.
msk_censored <- is_censored
N_censored <- sum(msk_censored)
# put data into a list for Stan
Stan_data <- list(N_uncensored = N - N_censored,
                  N_censored = N_censored,
                  numCovariates = ncol(X),
                  # drop = FALSE keeps the covariate matrices 2-D even when a
                  # single row is selected; the old as.matrix(X[msk, ]) would
                  # turn one selected row into a COLUMN vector if X ever had
                  # more than one covariate.
                  X_censored = X[msk_censored, , drop = FALSE],
                  X_uncensored = X[!msk_censored, , drop = FALSE],
                  times_censored = times[msk_censored],
                  times_uncensored = times[!msk_censored])
# fit Stan model
require(rstan)
# stan() compiles the model from the string above and runs its default
# sampling configuration (4 chains visible in the log below);
# suppressMessages() quiets compilation chatter, not the sampler progress.
exp_surv_model_fit <- suppressMessages(stan(model_code = Stan_exponential_survival_model, data = Stan_data))
sh: Data/bayesian: No such file or directory
sh: clang++ -mmacosx-version-min=10.13: command not found
Warning in system2(CXX, args = ARGS) : error in running command
Warning in file.remove(c(unprocessed, processed)) :
  cannot remove file '/var/folders/q8/7tchbyvd1dj3hkgw5ffkk6ph0000gp/T//RtmprAVZOZ/file2e643b696285.stan', reason 'No such file or directory'

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.000472 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 4.72 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 1.79959 seconds (Warm-up)
Chain 1:                2.08414 seconds (Sampling)
Chain 1:                3.88373 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 0.00021 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 2.1 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 1.94447 seconds (Warm-up)
Chain 2:                1.91101 seconds (Sampling)
Chain 2:                3.85547 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 0.000197 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 1.97 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 1.85257 seconds (Warm-up)
Chain 3:                1.84805 seconds (Sampling)
Chain 3:                3.70063 seconds (Total)
Chain 3: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 0.000243 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 2.43 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 1.765 seconds (Warm-up)
Chain 4:                2.03177 seconds (Sampling)
Chain 4:                3.79676 seconds (Total)
Chain 4: 
# print model fit
# Record the RNG seed rstan used so this exact run can be reproduced.
print(get_seed(exp_surv_model_fit))
[1] 1781592037
# print fit summary
# summary() pools all chains; $summary holds per-parameter posterior
# statistics (mean, se_mean, sd, quantiles, n_eff, Rhat).
fit_summary <- summary(exp_surv_model_fit)
print(fit_summary$summary)
                                       mean     se_mean           sd          2.5%           25%           50%          75%        97.5%     n_eff      Rhat
beta[1]                           0.9610668 0.003469243   0.09864530     0.7662478     0.8964092     0.9599723     1.025412     1.157458  808.5063 1.0030074
alpha                            -5.5702904 0.003259977   0.09268295    -5.7530200    -5.6312316    -5.5684936    -5.507378    -5.387270  808.2966 1.0028752
times_uncensored_sampled[1]     101.4007616 1.579621665  99.28152824     3.1663896    30.0705682    71.1079952   141.571492   367.335410 3950.3061 0.9992525
times_uncensored_sampled[2]      99.8674940 1.650189014  98.81590701     2.6235553    29.0850175    69.0509403   142.613116   355.883333 3585.8023 0.9999019
times_uncensored_sampled[3]      99.7254321 1.606082397 100.17309805     2.8178707    29.3295996    69.2424462   138.136578   375.291482 3890.1520 1.0005881
times_uncensored_sampled[4]     101.5660981 1.611727202  98.53597710     2.6310402    30.8346605    72.8571719   140.246676   360.458331 3737.7184 1.0007518
times_uncensored_sampled[5]      98.6962559 1.690417174 100.59410450     1.9990443    26.8521824    66.1102421   135.098469   365.143576 3541.2555 1.0000183
times_uncensored_sampled[6]     101.4598013 1.620750674 100.03240473     2.5743604    29.7873639    71.6273625   138.879095   370.618112 3809.3335 0.9999513
times_uncensored_sampled[7]     101.1431563 1.634950386 104.09738844     2.3081476    29.1865746    70.2003937   137.658540   381.073256 4053.8765 0.9999765
times_uncensored_sampled[8]      98.9337840 1.583065017  99.35095712     2.5270233    29.3593913    68.6434380   138.402479   368.169193 3938.6429 0.9995024
times_uncensored_sampled[9]     101.6496298 1.565374770  98.06324682     2.5913941    30.2829320    73.7378852   140.908332   373.085534 3924.4236 0.9994005
times_uncensored_sampled[10]    102.4643298 1.661085391 103.96221644     2.6974187    29.1259119    69.8830345   142.351890   378.348536 3917.1224 1.0007666
times_uncensored_sampled[11]     99.8983379 1.564914995 102.75569355     2.7272146    28.1834477    68.2245622   135.800533   383.388905 4311.5188 1.0004416
times_uncensored_sampled[12]    103.8273646 1.668850449 104.10405948     2.4479161    28.4554992    70.7262366   145.691189   382.322586 3891.3518 0.9999383
times_uncensored_sampled[13]    100.9167185 1.602960451 105.23319844     2.6251072    28.6545466    66.9158948   135.110649   380.164265 4309.8279 1.0000070
times_uncensored_sampled[14]    100.3434487 1.691310420 101.51880870     2.4835215    27.2307201    68.3313511   140.962114   380.146838 3602.8516 1.0011187
times_uncensored_sampled[15]     99.9792842 1.677036474 104.09197843     2.4569254    29.5752085    67.3836452   135.115862   370.107965 3852.5609 0.9996538
times_uncensored_sampled[16]     98.9432885 1.538469434  96.50244932     2.7328610    30.0626784    69.7344608   137.253657   356.102743 3934.5850 1.0006864
times_uncensored_sampled[17]    100.5680984 1.593588526 101.07109474     2.3656542    29.0821363    70.2374611   140.553065   378.920948 4022.5509 1.0004719
times_uncensored_sampled[18]     98.1952798 1.540909553  96.64714695     2.3368066    29.0490755    68.2979261   139.083927   356.590003 3933.9042 0.9998619
times_uncensored_sampled[19]     99.4638958 1.501457159  96.41548754     2.3420991    29.7652979    70.2084221   139.412060   353.211124 4123.5163 1.0000883
times_uncensored_sampled[20]    100.1389257 1.551265855 100.39686227     2.7963286    29.3255387    67.7669214   136.378719   373.375593 4188.5896 1.0003627
times_uncensored_sampled[21]    101.3416623 1.596288946 102.31952362     2.5086553    28.3483812    68.8377345   141.559967   384.395139 4108.6014 0.9999264
times_uncensored_sampled[22]    103.0978279 1.632757628 102.93945763     3.0625646    29.9585805    71.2550232   141.784395   374.699024 3974.8460 0.9997074
times_uncensored_sampled[23]    100.8922851 1.547649201  98.33391699     2.9453731    28.8955884    72.0470565   141.863026   372.726888 4037.0267 1.0001818
times_uncensored_sampled[24]    103.7992741 1.905380858 101.49756641     2.9858914    29.6986331    71.3053140   143.610993   376.930974 2837.5771 1.0017859
times_uncensored_sampled[25]     99.3020157 1.620669233 100.96920723     3.0826676    29.2715722    68.1121017   137.548704   370.765488 3881.4064 1.0003501
times_uncensored_sampled[26]    102.4638590 1.734846786 104.37873161     2.6898286    28.6144758    69.8170094   138.826640   376.618414 3619.9434 0.9998252
times_uncensored_sampled[27]    102.6901808 1.643278270 102.18899071     2.5205700    31.2300601    72.8582956   138.536532   368.584367 3867.1053 1.0000300
times_uncensored_sampled[28]     99.1673112 1.533795120  97.46302386     2.3270354    28.3309493    69.5676199   137.330148   355.321715 4037.8024 1.0001796
times_uncensored_sampled[29]     98.9146521 1.641131260  98.69487651     2.6817901    28.6743417    67.9892968   139.392290   359.377369 3616.6175 1.0004195
times_uncensored_sampled[30]     99.7620452 1.534652680  98.93953682     2.7977234    28.8503989    69.6760369   138.129868   359.539162 4156.4211 1.0002208
times_uncensored_sampled[31]    102.4552118 1.705786729 101.28420729     2.3299213    30.4646982    70.2029180   140.733408   375.809153 3525.6078 0.9997550
times_uncensored_sampled[32]    104.0016602 1.627267226 103.79922279     2.7568610    30.2577453    72.4506572   143.103245   385.081924 4068.8385 0.9995274
times_uncensored_sampled[33]    102.1136935 1.620753462 100.64987336     2.6288861    29.2045194    71.8093887   140.752185   370.924405 3856.4930 1.0002365
times_uncensored_sampled[34]    101.4097733 1.653739910 102.44295821     2.3306596    28.1569750    70.9830403   141.452379   385.943776 3837.3358 0.9999280
times_uncensored_sampled[35]     99.4260666 1.573452004  98.82834651     2.5782976    29.7909258    70.6565222   139.491705   371.499811 3945.0822 1.0010901
times_uncensored_sampled[36]     98.6213940 1.642587741 100.32401365     2.0679316    26.5304457    69.1079149   137.000961   364.615390 3730.3763 1.0002556
times_uncensored_sampled[37]     99.6320020 1.532822403  96.79202022     3.1174462    29.4904568    71.3058339   137.114916   361.657812 3987.4517 0.9998778
times_uncensored_sampled[38]    100.6688368 1.580810575  99.35169261     2.6760522    28.8581944    69.4260044   141.599705   369.192804 3949.9434 0.9995329
times_uncensored_sampled[39]     99.8605247 1.606684079  99.66322137     2.3737557    29.5792362    69.6953472   139.126799   365.722000 3847.7678 1.0005542
times_uncensored_sampled[40]    100.6406664 1.644937771  99.60585349     2.4320771    28.6913451    70.4824327   141.818871   359.913034 3666.6611 0.9993083
times_uncensored_sampled[41]    103.8623635 1.634267871 102.06391247     2.4844398    29.7329374    73.0194921   144.543404   374.970685 3900.2993 1.0006517
times_uncensored_sampled[42]    101.7245265 1.576633585  99.96115449     2.1828391    29.1317177    71.3159636   141.323483   367.665765 4019.7679 1.0005205
times_uncensored_sampled[43]    103.9100094 1.647498906 103.29140218     2.9860819    32.0317672    73.8496706   141.840128   380.805314 3930.7740 0.9991513
times_uncensored_sampled[44]    100.6544524 1.592531682  99.54032536     2.8053084    30.1159524    70.6961084   138.980894   368.464521 3906.8069 1.0003501
times_uncensored_sampled[45]     99.8773502 1.608043418  99.93374210     2.6648112    28.9467675    68.3868849   139.002255   369.260868 3862.1466 1.0001708
times_uncensored_sampled[46]     98.7347071 1.617593512 100.77150371     2.5432323    28.4647770    67.4847609   136.639953   372.219783 3880.9378 0.9997949
times_uncensored_sampled[47]     98.6015038 1.617431472 100.98683748     2.4497314    27.9814346    68.2326300   134.235876   376.521962 3898.3225 0.9996416
times_uncensored_sampled[48]    101.3695341 1.596158544 103.31799526     2.7444437    27.9951623    69.3620589   142.438706   382.107312 4189.8636 0.9995612
times_uncensored_sampled[49]     98.9684035 1.603559749 100.01520097     2.4068339    28.5877136    70.9291945   136.476897   374.513017 3890.1086 0.9991667
times_uncensored_sampled[50]    100.0600020 1.654733731 103.18179697     2.5274968    27.9708746    67.8523456   137.749604   380.082290 3888.2120 0.9998109
times_uncensored_sampled[51]    100.6339179 1.668504757 100.90474014     2.6580141    28.0223386    69.8169823   138.893476   377.272925 3657.3644 0.9994740
times_uncensored_sampled[52]    101.2733583 1.618181032 100.35192055     2.4305453    30.2455717    71.3383310   140.625370   364.934841 3845.8927 0.9991729
times_uncensored_sampled[53]     99.6968587 1.592663669 100.43574127     2.1989698    29.1312226    69.5604673   136.320885   373.313665 3976.7513 0.9995807
times_uncensored_sampled[54]     99.2555014 1.685804779  97.71950624     2.3769064    29.4269087    70.3015035   135.705418   357.786640 3360.0672 1.0003406
times_uncensored_sampled[55]    100.7606272 1.602585914 103.20357411     2.5615390    29.1158433    68.0186372   136.830168   383.056190 4147.1222 0.9994995
times_uncensored_sampled[56]     98.8235280 1.558602397  98.26204237     2.1952417    26.4684516    67.7546516   141.245509   357.120265 3974.6683 0.9996522
times_uncensored_sampled[57]    101.1561216 1.664334764 103.87590151     2.5605195    28.9415522    69.4458671   137.609171   378.696127 3895.3658 1.0001552
times_uncensored_sampled[58]    100.0354126 1.619929128  99.84477294     2.3379141    29.6062871    68.8314915   137.488470   363.528799 3798.9068 0.9999869
times_uncensored_sampled[59]    100.6814514 1.586591788 100.76473871     2.5277112    30.0326446    69.4509516   137.655035   378.190391 4033.5437 0.9998752
times_uncensored_sampled[60]    100.4849704 1.545261806 100.14102192     2.4856512    28.9254610    69.6004950   142.697606   366.139571 4199.7158 0.9997204
times_uncensored_sampled[61]    100.3823089 1.590368193 101.62470852     2.2960232    29.1733661    69.6809506   137.180454   371.023317 4083.2245 1.0006545
times_uncensored_sampled[62]    102.1880806 1.593646404 100.92367347     3.2054312    31.8402350    71.8708046   139.934912   374.436975 4010.5337 0.9997584
times_uncensored_sampled[63]    102.1109559 1.678009997 102.63049620     2.2101561    28.5714370    70.9498818   141.385892   371.793190 3740.7938 0.9998071
times_uncensored_sampled[64]    101.2824505 1.632432351  98.64354758     2.6577523    30.3843893    70.1015489   143.497273   365.911625 3651.4636 1.0002328
times_uncensored_sampled[65]    100.5779011 1.607443956 100.88651908     2.4901288    28.3332767    69.8325328   140.735568   367.641086 3939.0781 0.9995665
times_uncensored_sampled[66]     99.8520256 1.530828631  98.91160690     3.1543502    28.9649613    70.2495458   138.739215   368.100469 4174.8547 0.9994002
times_uncensored_sampled[67]    100.6833393 1.565037048  98.57380346     2.2888389    29.5237410    68.8641914   141.684028   367.858006 3967.1058 1.0006201
times_uncensored_sampled[68]    102.3528226 1.639415797 102.79093217     2.7641136    29.5535080    70.4509394   141.690821   368.664738 3931.2565 0.9993744
times_uncensored_sampled[69]    101.1736573 1.589817746 103.10678575     2.4018895    27.6702823    70.3216005   140.342264   381.419243 4206.1022 1.0000502
times_uncensored_sampled[70]    100.6658694 1.588249256  98.77472249     2.3565076    28.6109942    69.7953940   144.573921   357.619236 3867.7137 0.9994157
times_uncensored_sampled[71]     99.8934172 1.509919733  96.30698908     3.0097468    30.0606571    70.2152167   140.633948   357.327229 4068.2524 0.9996342
times_uncensored_sampled[72]     97.7411180 1.603410819  99.70578290     2.7640139    28.2390705    67.4748281   134.805557   373.671190 3866.7944 0.9999274
times_uncensored_sampled[73]     99.9086500 1.658712014  98.72480948     2.3969666    28.7753713    69.3831869   139.772320   364.940669 3542.5063 0.9994832
times_uncensored_sampled[74]    102.2037839 1.618139570 103.29394196     2.3335256    28.3188644    70.0509134   142.412347   384.518996 4074.9074 1.0002697
times_uncensored_sampled[75]    101.3108096 1.685144920 101.99806401     2.4177107    28.6058668    67.3665735   141.808731   381.514382 3663.6109 0.9998487
times_uncensored_sampled[76]    100.0380304 1.631054136 102.96088829     2.2003227    27.0576554    68.1600702   138.675137   384.352661 3984.8118 0.9996969
times_uncensored_sampled[77]    101.6743376 1.597353138 100.58480463     2.6376923    30.1471815    70.3125716   140.368644   371.158289 3965.1797 1.0001037
times_uncensored_sampled[78]     98.2503343 1.634409147 100.17178302     2.0639269    28.9163959    66.9410465   137.019832   363.234240 3756.3776 0.9998128
times_uncensored_sampled[79]    100.5972538 1.642183993 100.03140564     2.6701801    29.5732454    70.1697681   140.106848   363.991113 3710.4716 0.9996782
times_uncensored_sampled[80]    102.6351587 1.638485616 104.04326114     2.7876907    29.5408746    70.6619431   140.861902   386.779929 4032.2054 0.9992672
times_uncensored_sampled[81]    100.2649587 1.652985511 102.70449978     2.7923013    28.8636105    68.1022840   136.479248   375.838879 3860.4759 0.9993682
times_uncensored_sampled[82]    100.0768307 1.567083951 100.01916616     2.3937711    28.4415317    68.3720457   139.903532   375.719381 4073.6333 0.9999585
times_uncensored_sampled[83]    100.1468588 1.531410132  97.59235073     2.3141427    28.9037724    71.1687203   139.923489   347.936984 4061.1453 0.9997024
times_uncensored_sampled[84]    103.9296480 1.673775959 105.28222510     2.4244231    29.5937324    71.0708172   143.306291   388.598090 3956.5391 0.9994414
times_uncensored_sampled[85]    102.1305428 1.655146536 103.71796944     2.2370995    29.2913203    69.2358323   140.994049   381.185355 3926.7668 1.0000299
times_uncensored_sampled[86]     98.9726324 1.571608482  99.97827138     2.6617436    28.2947595    68.7545149   137.059297   362.258778 4046.9004 0.9992148
times_uncensored_sampled[87]     98.6867424 1.663617083 100.39366596     2.5429066    27.8162488    66.4037481   135.035546   366.279187 3641.7144 1.0005194
times_uncensored_sampled[88]    102.0335427 1.554155985 100.34463410     2.6562723    28.6932637    71.9099501   142.871107   374.665348 4168.6852 0.9997515
times_uncensored_sampled[89]     98.9898092 1.595903899 101.77530496     2.5908472    27.4182303    67.7155495   136.105703   371.040805 4066.9736 1.0004778
times_uncensored_sampled[90]    101.5090084 1.636672841 103.59211906     2.5084052    27.4999140    68.3907634   140.077744   368.646561 4006.1728 0.9996835
times_uncensored_sampled[91]     98.1556334 1.576525273  97.04898713     2.3652807    27.5780474    68.0264928   137.679495   355.848367 3789.4845 1.0007971
times_uncensored_sampled[92]     99.5970802 1.666685203 100.82706148     2.2698478    27.9087690    67.9152724   138.060001   365.286256 3659.7133 0.9994646
times_uncensored_sampled[93]    101.5815642 1.655934829 101.61201619     2.0763661    28.0261219    70.2450552   140.059959   373.962017 3765.3353 1.0000585
times_uncensored_sampled[94]     99.7524186 1.557855972  98.03061430     2.5600517    28.3042517    69.8383784   139.381542   353.097261 3959.7598 0.9997430
times_uncensored_sampled[95]    100.7637657 1.606235268 100.33513152     2.4639736    29.0499588    70.4971321   142.230029   364.628866 3902.0042 0.9998357
times_uncensored_sampled[96]    101.5112267 1.689558031 104.14613749     2.0722841    28.7269151    70.2354480   139.521292   377.751501 3799.6195 0.9995552
times_uncensored_sampled[97]    101.9205645 1.675397947 101.53484738     2.5774725    28.7931825    70.1994251   144.568393   366.343129 3672.7747 1.0004150
times_uncensored_sampled[98]    103.9088797 1.718623618 105.83285306     2.6449154    28.1632443    70.8990062   146.221107   391.305317 3792.0972 1.0004793
 [ reached getOption("max.print") -- omitted 678 rows ]
# One row per posterior draw, with .chain/.iteration/.draw bookkeeping columns
# alongside the model parameters.
exp_surv_model_draws <- tidybayes::tidy_draws(exp_surv_model_fit)
exp_surv_model_draws
## Constructor for Strata-specific survival function
## Returns a closure S(t) = exp(-lambda * t) with rate
## lambda = exp(alpha + x * beta), i.e. the exponential-model survival
## curve evaluated at covariate value x. Vectorized over t.
construct_survival_function <- function(alpha, beta, x) {
    function(t) exp(-exp(alpha + x * beta) * t)
}

## Random functions
## Build one survival-function closure per posterior draw, for each of the
## two strata of the single covariate (x = 1 and x = 0).
exp_surv_model_surv_func <-
    exp_surv_model_draws %>%
    ## Keep the draw bookkeeping plus the two parameters, renaming
    ## `beta[1]` to plain beta in the same step.
    select(.chain, .iteration, .draw, alpha, beta = `beta[1]`) %>%
    ## One realization of the random survival function per draw.
    mutate(`S(t|1)` = map2(alpha, beta, function(a, b) construct_survival_function(a, b, 1)),
           `S(t|0)` = map2(alpha, beta, function(a, b) construct_survival_function(a, b, 0)))
exp_surv_model_surv_func
# Evaluation grid: 0 to 165 months in 0.1-month steps.
times <- seq(from = 0, to = 165, by = 0.1)
# tibble() replaces data_frame(), deprecated since tibble 1.1.0
# (this removes the deprecation warning seen in the transcript).
times_df <- tibble(t = times)
Warning: `data_frame()` was deprecated in tibble 1.1.0.
Please use `tibble()` instead.
This warning is displayed once every 8 hours.
Call `lifecycle::last_lifecycle_warnings()` to see where this warning was generated.
## Try first realizations
## Evaluate the first posterior draw's S(t | x = 1) on the first 10 grid points.
exp_surv_model_surv_func$`S(t|1)`[[1]](times[1:10])
 [1] 1.0000000 0.9989518 0.9979047 0.9968586 0.9958137 0.9947699 0.9937272 0.9926855 0.9916450 0.9906055
## Same draw, stratum x = 0.
exp_surv_model_surv_func$`S(t|0)`[[1]](times[1:10])
 [1] 1.0000000 0.9995855 0.9991711 0.9987569 0.9983429 0.9979290 0.9975153 0.9971018 0.9966885 0.9962753
## Apply all realizations
## Evaluate every draw's survival functions on the shared time grid and
## stack the two strata into long format for plotting.
exp_surv_model_survival <-
    exp_surv_model_surv_func %>%
    mutate(times_df = list(times_df)) %>%
    mutate(times_df = pmap(list(times_df, `S(t|1)`, `S(t|0)`),
                           function(df, s1, s0) {df %>% mutate(s1 = s1(t),
                                                               s0 = s0(t))})) %>%
    select(-`S(t|1)`, -`S(t|0)`) %>%
    unnest(cols = c(times_df)) %>%
    ## pivot_longer() supersedes gather(); row ordering differs from gather()
    ## but every downstream use (grouped summaries, ggplot) is order-independent.
    pivot_longer(cols = c(s1, s0), names_to = "Strata", values_to = "survival") %>%
    mutate(Strata = factor(Strata, # Strata is whether or not project has more than 20 developers working
                              levels = c("s1","s0"),
                              labels = c("author count <= 20","author count > 20")))

## Average on survival scale
## Posterior mean and 95% credible band of S(t), per stratum and time point.
exp_surv_model_survival_mean <-
    exp_surv_model_survival %>%
    group_by(Strata, t) %>%
    summarize(survival_mean = mean(survival),
              survival_95upper = quantile(survival, probs = 0.975),
              survival_95lower = quantile(survival, probs = 0.025),
              ## Explicit .groups silences the "summarise() has grouped output"
              ## message; "drop_last" matches the default (result stays grouped
              ## by Strata), so behavior is unchanged.
              .groups = "drop_last")
`summarise()` has grouped output by 'Strata'. You can override using the `.groups` argument.
exp_surv_model_survival
# plot the graphs
# Spaghetti plot: one thin, highly transparent line per posterior draw and
# stratum, overlaid with the posterior mean (solid) and the 2.5%/97.5%
# quantile band (dotted) per stratum, y axis as percent survival.
(ggplot(data = exp_surv_model_survival, mapping = aes(x = t, y = survival, color = Strata, group = interaction(.chain,.draw,Strata))) 
 + geom_line(size = 0.1, alpha = 0.02) 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_mean, group = Strata)) 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95upper, group = Strata), linetype = "dotted") 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95lower, group = Strata), linetype = "dotted")
  + scale_y_continuous(labels = percent_format(), limits=c(0,1))
 +labs(x = "Time (Months)", y = "Survival probability")
 + theme_classic()
 + theme(axis.text.x = element_text(angle = 90, vjust = 0.5), legend.key = element_blank(), legend.position="top", plot.title = element_text(hjust = 0.5), strip.background = element_blank()))

LS0tCnRpdGxlOiAiQmF5ZXNpYW4gc3Vydml2YWwgYW5hbHlzaXMgdXNpbmcgYXV0aG9yIGNvdW50IGFzIGEgcHJlZGljdG9yIgpvdXRwdXQ6IGh0bWxfbm90ZWJvb2sKLS0tCmBgYHtyfQpsaWJyYXJ5KHJzdGFuKQpsaWJyYXJ5KHN1cnZpdmFsKQpsaWJyYXJ5KHRpZHl2ZXJzZSkKbGlicmFyeSh0aWR5YmF5ZXMpCmxpYnJhcnkoc2NhbGVzKQpgYGAKIAoKYGBge3J9CiMgZGF0YSwgcGFyYW1ldGVycywgbW9kZWwgYW5kIGdlbmVyYXRlZCBxdWFudGl0aWVzIGJsb2NrcwpTdGFuX2V4cG9uZW50aWFsX3N1cnZpdmFsX21vZGVsIDwtICIKZGF0YXsKICBpbnQgPGxvd2VyPTE+IE5fdW5jZW5zb3JlZDsKICBpbnQgPGxvd2VyPTE+IE5fY2Vuc29yZWQ7CiAgaW50IDxsb3dlcj0wPiBudW1Db3ZhcmlhdGVzOwogIG1hdHJpeFtOX2NlbnNvcmVkLCBudW1Db3ZhcmlhdGVzXSBYX2NlbnNvcmVkOwogIG1hdHJpeFtOX3VuY2Vuc29yZWQsIG51bUNvdmFyaWF0ZXNdIFhfdW5jZW5zb3JlZDsKICB2ZWN0b3IgPGxvd2VyPTA+W05fY2Vuc29yZWRdIHRpbWVzX2NlbnNvcmVkOwogIHZlY3RvciA8bG93ZXI9MD5bTl91bmNlbnNvcmVkXSB0aW1lc191bmNlbnNvcmVkOwp9CgpwYXJhbWV0ZXJzewogIHZlY3RvcltudW1Db3ZhcmlhdGVzXSBiZXRhOyAvL3JlZ3Jlc3Npb24gY29lZmZpY2llbnRzCiAgcmVhbCBhbHBoYTsgLy9pbnRlcmNlcHQKfQoKbW9kZWx7CiAgYmV0YSB+IG5vcm1hbCgwLDEwKTsgLy9wcmlvciBvbiByZWdyZXNzaW9uIGNvZWZmaWNpZW50cwogIGFscGhhIH4gbm9ybWFsKDAsMTApOyAvL3ByaW9yIG9uIGludGVyY2VwdAogIHRhcmdldCArPSBleHBvbmVudGlhbF9scGRmKHRpbWVzX3VuY2Vuc29yZWQgfCBleHAoYWxwaGErWF91bmNlbnNvcmVkICogYmV0YSkpOyAvL2xvZy1saWtlbGlob29kIHBhcnQgZm9yIHVuY2Vuc29yZWQgdGltZXMKICB0YXJnZXQgKz0gZXhwb25lbnRpYWxfbGNjZGYodGltZXNfY2Vuc29yZWQgfCBleHAoYWxwaGErWF9jZW5zb3JlZCAqIGJldGEpKTsgLy9sb2ctbGlrZWxpaG9vZCBmb3IgY2Vuc29yZWQgdGltZXMKfQoKZ2VuZXJhdGVkIHF1YW50aXRpZXN7CiAgdmVjdG9yW05fdW5jZW5zb3JlZF0gdGltZXNfdW5jZW5zb3JlZF9zYW1wbGVkOyAvL3ByZWRpY3Rpb24gb2YgZGVhdGgKICBmb3IoaSBpbiAxOk5fdW5jZW5zb3JlZCkgewogICAgdGltZXNfdW5jZW5zb3JlZF9zYW1wbGVkW2ldID0gZXhwb25lbnRpYWxfcm5nKGV4cChhbHBoYStYX3VuY2Vuc29yZWRbaSxdKiBiZXRhKSk7CiAgfQp9CiIKYGBgCgpgYGB7cn0KIyBwcmVwYXJlIHRoZSBkYXRhCnNldC5zZWVkKDQyKTsgCnJlcXVpcmUgKHRpZHl2ZXJzZSk7CmRhdGEgPC0gcmVhZF9jc3YoJy4uL2RhdGEvbmVjZXNzYXJ5X2ZpZWxkcy5jc3YnKQpOIDwtIG5yb3cgKGRhdGEpOwpkYXRhJGhpZ2hfYXV0aG9yX2NvdW50IDwtIGNhcjo6cmVjb2RlKGRhdGEkaGlnaF9hdXRob3JfY291bnQsICInVFJVRScgPSAwOyAnRkFMU0UnID0gMSIpCmRhdGEkY2Vuc29yZWQg
PC0gY2FyOjpyZWNvZGUoZGF0YSRjZW5zb3JlZCwgIidUUlVFJyA9IDA7ICdGQUxTRScgPSAxIikKWCA8LSBhcy5tYXRyaXgocHVsbChkYXRhLCBoaWdoX2F1dGhvcl9jb3VudCkpOyAKaXNfY2Vuc29yZWQgPC0gcHVsbChkYXRhLCBjZW5zb3JlZCk9PTA7IAp0aW1lcyA8LSBwdWxsKGRhdGEsIGR1cmF0aW9uX21vbnRocyk7IAptc2tfY2Vuc29yZWQgPC0gaXNfY2Vuc29yZWQgPT0gMTsgCk5fY2Vuc29yZWQgPC0gc3VtKG1za19jZW5zb3JlZCk7CmBgYAoKYGBge3J9CiMgcHV0IGRhdGEgaW50byBhIGxpc3QgZm9yIFN0YW4KU3Rhbl9kYXRhIDwtIGxpc3QgKE5fdW5jZW5zb3JlZCA9IE4gLSBOX2NlbnNvcmVkLCAKICAgICAgICAgICAgICAgICAgICBOX2NlbnNvcmVkID0gTl9jZW5zb3JlZCwKICAgICAgICAgICAgICAgICAgICBudW1Db3ZhcmlhdGVzID0gbmNvbChYKSwgCiAgICAgICAgICAgICAgICAgICAgWF9jZW5zb3JlZCA9IGFzLm1hdHJpeChYW21za19jZW5zb3JlZCxdKSwKICAgICAgICAgICAgICAgICAgICBYX3VuY2Vuc29yZWQgPSBhcy5tYXRyaXgoWFshbXNrX2NlbnNvcmVkICxdKSwgCiAgICAgICAgICAgICAgICAgICAgdGltZXNfY2Vuc29yZWQgPSB0aW1lc1ttc2tfY2Vuc29yZWRdLAogICAgICAgICAgICAgICAgICAgIHRpbWVzX3VuY2Vuc29yZWQgPSB0aW1lc1shbXNrX2NlbnNvcmVkXSkKYGBgCgpgYGB7cn0KIyBmaXQgU3RhbiBtb2RlbApyZXF1aXJlKHJzdGFuKQpleHBfc3Vydl9tb2RlbF9maXQgPC0gc3VwcHJlc3NNZXNzYWdlcyhzdGFuKG1vZGVsX2NvZGUgPSBTdGFuX2V4cG9uZW50aWFsX3N1cnZpdmFsX21vZGVsLCBkYXRhID0gU3Rhbl9kYXRhKSkKYGBgCgpgYGB7cn0KIyBwcmludCBtb2RlbCBmaXQKcHJpbnQoZ2V0X3NlZWQoZXhwX3N1cnZfbW9kZWxfZml0KSkKYGBgCgpgYGB7cn0KIyBwcmludCBmaXQgc3VtbWFyeQpmaXRfc3VtbWFyeSA8LSBzdW1tYXJ5KGV4cF9zdXJ2X21vZGVsX2ZpdCkKcHJpbnQoZml0X3N1bW1hcnkkc3VtbWFyeSkKYGBgCgpgYGB7cn0KZXhwX3N1cnZfbW9kZWxfZHJhd3MgPC0gdGlkeWJheWVzOjp0aWR5X2RyYXdzKGV4cF9zdXJ2X21vZGVsX2ZpdCkKZXhwX3N1cnZfbW9kZWxfZHJhd3MKYGBgCiAKYGBge3J9CiMjIENvbnN0cnVjdG9yIGZvciBTdHJhdGEtc3BlY2lmaWMgc3Vydml2YWwgZnVuY3Rpb24KY29uc3RydWN0X3N1cnZpdmFsX2Z1bmN0aW9uIDwtIGZ1bmN0aW9uKGFscGhhLCBiZXRhLCB4KSB7CiAgICBmdW5jdGlvbih0KSB7CiAgICAgICAgbGFtYmRhIDwtIGV4cChhbHBoYSArIHgqYmV0YSkKICAgICAgICBleHAoLShsYW1iZGEgKiB0KSkKICAgIH0KfQoKIyMgUmFuZG9tIGZ1bmN0aW9ucwpleHBfc3Vydl9tb2RlbF9zdXJ2X2Z1bmMgPC0KICAgIGV4cF9zdXJ2X21vZGVsX2RyYXdzICU+JQogICAgc2VsZWN0KC5jaGFpbiwgLml0ZXJhdGlvbiwgLmRyYXcsIGFscGhhLCBgYmV0YVsxXWApICU+JQogICAgIyMgU2ltcGxpZnkgbmFtZQogICAgcmVuYW1lKGJldGEgPSBg
YmV0YVsxXWApICU+JQogICAgIyMgQ29uc3RydWN0IHJlYWxpemF0aW9uIG9mIHJhbmRvbSBmdW5jdGlvbnMKICAgIG11dGF0ZShgUyh0fDEpYCA9IHBtYXAobGlzdChhbHBoYSwgYmV0YSksIGZ1bmN0aW9uKGEsYikge2NvbnN0cnVjdF9zdXJ2aXZhbF9mdW5jdGlvbihhLGIsMSl9KSwKICAgICAgICAgICBgUyh0fDApYCA9IHBtYXAobGlzdChhbHBoYSwgYmV0YSksIGZ1bmN0aW9uKGEsYikge2NvbnN0cnVjdF9zdXJ2aXZhbF9mdW5jdGlvbihhLGIsMCl9KSkKZXhwX3N1cnZfbW9kZWxfc3Vydl9mdW5jCmBgYAoKYGBge3J9CnRpbWVzIDwtIHNlcShmcm9tID0gMCwgdG8gPSAxNjUsIGJ5ID0gMC4xKQp0aW1lc19kZiA8LSBkYXRhX2ZyYW1lKHQgPSB0aW1lcykKCiMjIFRyeSBmaXJzdCByZWFsaXphdGlvbnMKZXhwX3N1cnZfbW9kZWxfc3Vydl9mdW5jJGBTKHR8MSlgW1sxXV0odGltZXNbMToxMF0pCmBgYAoKYGBge3J9CmV4cF9zdXJ2X21vZGVsX3N1cnZfZnVuYyRgUyh0fDApYFtbMV1dKHRpbWVzWzE6MTBdKQpgYGAKYGBge3J9CiMjIEFwcGx5IGFsbCByZWFsaXphdGlvbnMKZXhwX3N1cnZfbW9kZWxfc3Vydml2YWwgPC0KICAgIGV4cF9zdXJ2X21vZGVsX3N1cnZfZnVuYyAlPiUKICAgIG11dGF0ZSh0aW1lc19kZiA9IGxpc3QodGltZXNfZGYpKSAlPiUKICAgIG11dGF0ZSh0aW1lc19kZiA9IHBtYXAobGlzdCh0aW1lc19kZiwgYFModHwxKWAsIGBTKHR8MClgKSwKICAgICAgICAgICAgICAgICAgICAgICAgICAgZnVuY3Rpb24oZGYsIHMxLCBzMCkge2RmICU+JSBtdXRhdGUoczEgPSBzMSh0KSwKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgczAgPSBzMCh0KSl9KSkgJT4lCiAgICBzZWxlY3QoLWBTKHR8MSlgLCAtYFModHwwKWApICU+JQogICAgdW5uZXN0KGNvbHMgPSBjKHRpbWVzX2RmKSkgJT4lCiAgICBnYXRoZXIoa2V5ID0gU3RyYXRhLCB2YWx1ZSA9IHN1cnZpdmFsLCBzMSwgczApICU+JQogICAgbXV0YXRlKFN0cmF0YSA9IGZhY3RvcihTdHJhdGEsICMgU3RyYXRhIGlzIHdoZXRoZXIgb3Igbm90IHByb2plY3QgaGFzIG1vcmUgdGhhbiAyMCBkZXZlbG9wZXJzIHdvcmtpbmcKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgbGV2ZWxzID0gYygiczEiLCJzMCIpLAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICBsYWJlbHMgPSBjKCJhdXRob3IgY291bnQgPD0gMjAiLCJhdXRob3IgY291bnQgPiAyMCIpKSkKCiMjIEF2ZXJhZ2Ugb24gc3Vydml2YWwgc2NhbGUKZXhwX3N1cnZfbW9kZWxfc3Vydml2YWxfbWVhbiA8LQogICAgZXhwX3N1cnZfbW9kZWxfc3Vydml2YWwgJT4lCiAgICBncm91cF9ieShTdHJhdGEsIHQpICU+JQogICAgc3VtbWFyaXplKHN1cnZpdmFsX21lYW4gPSBtZWFuKHN1cnZpdmFsKSwKICAgICAgICAgICAgICBzdXJ2aXZhbF85NXVwcGVyID0gcXVhbnRpbGUoc3Vydml2YWwsIHByb2JzID0gMC45NzUpLAogICAgICAgICAgICAgIHN1
cnZpdmFsXzk1bG93ZXIgPSBxdWFudGlsZShzdXJ2aXZhbCwgcHJvYnMgPSAwLjAyNSkpCgpleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbApgYGAKCmBgYHtyfQojIHBsb3QgdGhlIGdyYXBocwooZ2dwbG90KGRhdGEgPSBleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbCwgbWFwcGluZyA9IGFlcyh4ID0gdCwgeSA9IHN1cnZpdmFsLCBjb2xvciA9IFN0cmF0YSwgZ3JvdXAgPSBpbnRlcmFjdGlvbiguY2hhaW4sLmRyYXcsU3RyYXRhKSkpIAogKyBnZW9tX2xpbmUoc2l6ZSA9IDAuMSwgYWxwaGEgPSAwLjAyKSAKICsgZ2VvbV9saW5lKGRhdGEgPSBleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbF9tZWFuLCBtYXBwaW5nID0gYWVzKHkgPSBzdXJ2aXZhbF9tZWFuLCBncm91cCA9IFN0cmF0YSkpIAogKyBnZW9tX2xpbmUoZGF0YSA9IGV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsX21lYW4sIG1hcHBpbmcgPSBhZXMoeSA9IHN1cnZpdmFsXzk1dXBwZXIsIGdyb3VwID0gU3RyYXRhKSwgbGluZXR5cGUgPSAiZG90dGVkIikgCiArIGdlb21fbGluZShkYXRhID0gZXhwX3N1cnZfbW9kZWxfc3Vydml2YWxfbWVhbiwgbWFwcGluZyA9IGFlcyh5ID0gc3Vydml2YWxfOTVsb3dlciwgZ3JvdXAgPSBTdHJhdGEpLCBsaW5ldHlwZSA9ICJkb3R0ZWQiKQogICsgc2NhbGVfeV9jb250aW51b3VzKGxhYmVscyA9IHBlcmNlbnRfZm9ybWF0KCksIGxpbWl0cz1jKDAsMSkpCiArbGFicyh4ID0gIlRpbWUgKE1vbnRocykiLCB5ID0gIlN1cnZpdmFsIHByb2JhYmlsaXR5IikKICsgdGhlbWVfY2xhc3NpYygpCiArIHRoZW1lKGF4aXMudGV4dC54ID0gZWxlbWVudF90ZXh0KGFuZ2xlID0gOTAsIHZqdXN0ID0gMC41KSwgbGVnZW5kLmtleSA9IGVsZW1lbnRfYmxhbmsoKSwgbGVnZW5kLnBvc2l0aW9uPSJ0b3AiLCBwbG90LnRpdGxlID0gZWxlbWVudF90ZXh0KGhqdXN0ID0gMC41KSwgc3RyaXAuYmFja2dyb3VuZCA9IGVsZW1lbnRfYmxhbmsoKSkpCmBgYAo=